Autoregressive Models

Imports

In [678]:
import sys
sys.path.insert(0, '../src/')

import warnings
warnings.filterwarnings('ignore')

%matplotlib inline

from datetime import date
import geopandas as gpd
from IPython.display import display, HTML
import matplotlib.gridspec as gridspec
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from pandas.plotting import lag_plot
from pandas.plotting import autocorrelation_plot
from statsmodels.tsa.ar_model import AR
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from utils import load_pkl, generate_times
import seaborn as sns; sns.set()
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import StratifiedKFold
from metrics import *

from preprocessing import normalize

from tqdm.auto import tqdm
tqdm.pandas()

# Imports classes
from Baseline import *
from Regressor import *
from utils import *

from IPython.core.interactiveshell import InteractiveShell
InteractiveShell.ast_node_interactivity = "all"

Loading Data

Contour Iris

In [624]:
contour_iris = gpd.read_file(
    '../datasets/iris/iris.shp')

convert_to_int = ['dep', 'insee_com', 'iris', 'code_iris']
for col in convert_to_int:
    contour_iris[col] = contour_iris[col].astype(int)

contour_iris = contour_iris[['code_iris', 'geometry', 'dep']]
contour_iris.head();

Stations and Dates

In [625]:
station_data = pd.read_csv("../datasets/station_to_iris.csv")
station_data.describe();
In [626]:
stations_mode = load_pkl("../datasets/stations_mode.pkl")
subway_stations = [k for k, v in stations_mode.items() if v == 3]
print("Number of Subway stations: {}".format(len(subway_stations)))
Number of Subway stations: 303

Subway stations with fewer than $80000$ validations over the $3$-month period. Note that this is before we normalize the data. In the article, the authors removed $3$ subway stations, assuming they were closed for renovation work. Below we print the $4$ stations with the smallest numbers of validations.

In [627]:
station_data[(station_data['id'].isin(subway_stations)) & (station_data['validations_count'] < 80000)];
In [628]:
dates = pd.date_range(start="2015-10-01", end="2015-12-31").date

Discretized Matrix

In [629]:
matrix_6h = np.load("../datasets/6h_matrix.npy")
matrix_2h = np.load("../datasets/2h_matrix.npy")
matrix_15m = np.load("../datasets/15m_matrix.npy")

Data Analysis and Preprocessing

In [630]:
f, ax = plt.subplots(1, figsize=(16, 12))
ax = contour_iris[contour_iris['dep'].isin([75, 92, 93, 94])].plot(
    ax=ax, edgecolor='black', column='dep', cmap='icefire_r')
ax.scatter(station_data[station_data['id'].isin(subway_stations)]['x'],
           station_data[station_data['id'].isin(subway_stations)]['y'], color='firebrick', label='Subway Stations')
ax.set_xlabel('Longitude')
ax.set_ylabel('Latitude')
ax.set_title('Subway Stations in Île de France')
ax.legend()

plt.show();

Min Max Normalization

Below we apply min-max normalization to the data, scaling it to the range $[0, 1]$.
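
As a reference, here is a minimal sketch of min-max scaling; the project's normalize in preprocessing may differ in detail (e.g. scaling per station rather than globally):

import numpy as np

def min_max(x):
    """Scale x linearly so that its minimum maps to 0 and its maximum to 1."""
    x = np.asarray(x, dtype=float)
    return (x - x.min()) / (x.max() - x.min())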

In [631]:
data_matrix_6h = pd.Panel(normalize(matrix_6h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("6h")
                        )

data_matrix_2h = pd.Panel(normalize(matrix_2h), 
                         items=dates, 
                         major_axis=subway_stations, 
                         minor_axis=generate_times("2h")
                        )

data_matrix_15m_complete = pd.Panel(matrix_15m, 
                                    items=dates, 
                                    major_axis=subway_stations, 
                                    minor_axis=generate_times("15min")
                                   )

We delete the first $4$ hours, from $00.00.00$ to $04.00.00$, because the number of validations in that range is almost always $0$. At a $15$-minute discretization, this removes the first $4 \times 4 = 16$ slots.

In [776]:
del_hours = 4
In [777]:
data_matrix_15m = data_matrix_15m_complete.iloc[:, :, del_hours*4:]
In [778]:
data_matrix_15m.to_frame().head()
Out[778]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
major minor
198 04:00:00 0.0 3.0 0.0 0.0 0.0 0.0 0.0 2.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
04:15:00 0.0 0.0 0.0 0.0 2.0 0.0 0.0 0.0 0.0 0.0 ... 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0
04:30:00 0.0 1.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 ... 1.0 1.0 2.0 1.0 0.0 1.0 0.0 0.0 0.0 0.0
04:45:00 1.0 2.0 0.0 2.0 1.0 0.0 1.0 0.0 0.0 0.0 ... 2.0 0.0 0.0 2.0 0.0 0.0 0.0 0.0 0.0 2.0
05:00:00 7.0 11.0 6.0 12.0 10.0 6.0 9.0 12.0 9.0 13.0 ... 11.0 9.0 7.0 1.0 5.0 10.0 12.0 10.0 10.0 5.0

5 rows × 92 columns

In [779]:
dmatrix_mean_6h = data_matrix_6h.mean()
dmatrix_mean_2h = data_matrix_2h.mean()
dmatrix_mean_15m = data_matrix_15m.mean()

dtmatrix_mean_6h = dmatrix_mean_6h.transpose()
dtmatrix_mean_2h = dmatrix_mean_2h.transpose()
dtmatrix_mean_15m = dmatrix_mean_15m.transpose()

This is another way to display the stations with a small number of validations.

In [780]:
data_matrix_15m.mean(axis=0)[data_matrix_15m.mean(axis=0).sum(axis=1) < 810];
In [781]:
dmatrix_mean_15m.head()
dtmatrix_mean_15m.head()
Out[781]:
2015-10-01 2015-10-02 2015-10-03 2015-10-04 2015-10-05 2015-10-06 2015-10-07 2015-10-08 2015-10-09 2015-10-10 ... 2015-12-22 2015-12-23 2015-12-24 2015-12-25 2015-12-26 2015-12-27 2015-12-28 2015-12-29 2015-12-30 2015-12-31
04:00:00 0.112211 0.188119 0.036304 0.392739 0.072607 0.095710 0.099010 0.108911 0.108911 0.072607 ... 0.066007 0.102310 0.066007 0.029703 0.036304 0.013201 0.062706 0.089109 0.069307 0.075908
04:15:00 0.138614 0.138614 0.029703 0.554455 0.092409 0.082508 0.145215 0.072607 0.108911 0.072607 ... 0.075908 0.069307 0.056106 0.036304 0.013201 0.000000 0.085809 0.089109 0.062706 0.102310
04:30:00 0.174917 0.148515 0.059406 0.422442 0.075908 0.141914 0.141914 0.165017 0.105611 0.102310 ... 0.135314 0.135314 0.128713 0.112211 0.099010 0.079208 0.161716 0.108911 0.099010 0.079208
04:45:00 0.376238 0.462046 0.481848 1.072607 0.396040 0.501650 0.491749 0.584158 0.590759 0.478548 ... 0.468647 0.521452 0.442244 0.339934 0.346535 0.346535 0.432343 0.419142 0.409241 0.495050
05:00:00 2.643564 3.079208 2.458746 3.900990 2.973597 3.112211 3.293729 3.468647 3.254125 2.732673 ... 2.696370 3.207921 2.920792 1.217822 2.019802 1.795380 2.805281 2.993399 3.112211 2.798680

5 rows × 92 columns

Out[781]:
04:00:00 04:15:00 04:30:00 04:45:00 05:00:00 05:15:00 05:30:00 05:45:00 06:00:00 06:15:00 ... 21:30:00 21:45:00 22:00:00 22:15:00 22:30:00 22:45:00 23:00:00 23:15:00 23:30:00 23:45:00
2015-10-01 0.112211 0.138614 0.174917 0.376238 2.643564 19.864686 31.471947 31.874587 40.541254 48.481848 ... 70.927393 66.881188 65.399340 60.584158 60.755776 64.207921 75.128713 64.590759 52.138614 43.818482
2015-10-02 0.188119 0.138614 0.148515 0.462046 3.079208 22.399340 34.537954 34.650165 41.858086 50.056106 ... 78.590759 73.495050 70.907591 65.983498 66.211221 62.917492 66.330033 63.188119 57.755776 54.673267
2015-10-03 0.036304 0.029703 0.059406 0.481848 2.458746 14.254125 20.429043 19.184818 22.465347 24.531353 ... 82.211221 77.181518 73.755776 72.805281 74.570957 76.264026 83.947195 80.049505 76.587459 72.772277
2015-10-04 0.392739 0.554455 0.422442 1.072607 3.900990 15.920792 19.696370 16.963696 16.247525 17.610561 ... 43.570957 43.818482 41.745875 37.462046 36.722772 37.283828 52.759076 37.158416 31.211221 25.254125
2015-10-05 0.072607 0.092409 0.075908 0.396040 2.973597 20.858086 32.630363 32.729373 39.689769 52.072607 ... 61.039604 52.346535 53.125413 46.059406 45.498350 41.036304 41.254125 35.336634 28.003300 22.801980

5 rows × 80 columns

With Outliers

In [782]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Number of Validations')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01))

plt.xticks(rotation=90)
plt.show();
In [783]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dtmatrix_mean_15m.plot.area(ax=ax[0], legend=False)
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('15min')

ax2 = dtmatrix_mean_2h.plot.area(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('2h')
ax2.legend(bbox_to_anchor=(1., 1.01))

ax3 = dtmatrix_mean_6h.plot.area(ax=ax[2])
ax3.set_xlabel('Days')
ax3.set_ylabel('Number of Validations')
ax3.set_title('6h')
ax3.legend(bbox_to_anchor=(1., 1.01), loc=2)

plt.xticks(rotation=90)
plt.show();
In [784]:
fig = plt.figure(figsize=(16, 6))
gs = gridspec.GridSpec(1, 1)
ax = fig.add_subplot(gs[0])
dmatrix_mean_15m.plot(ax=ax, legend=False)
plt.ylabel('Number of Validations')
plt.title('15min')

plt.xticks(rotation=90)
plt.show();
In [785]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = dmatrix_mean_15m.iloc[:, :31].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('October\'s number of validations')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)


ax2 = dmatrix_mean_15m.iloc[:, 31:61].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('November\'s number of validations')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = dmatrix_mean_15m.iloc[:, 61:].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Number of Validations')
ax3.set_title('December\'s number of validations')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [786]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = dtmatrix_mean_15m.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = dmatrix_mean_15m.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();

Defining useful variables

In [787]:
from __init__ import *
In [788]:
wd_15m = data_matrix_15m.loc[dict_w.values()]
wdm_15m = wd_15m.mean()
wdmt_15m = wdm_15m.transpose()

wd_15mf = data_matrix_15m.loc[dict_wd_final.values()]
wdm_15mf = wd_15mf.mean()
wdmt_15mf = wdm_15mf.transpose()

Without outliers

In [789]:
f, ax = plt.subplots(3, figsize=(16, 12))
ax1 = wdm_15m.loc[:, dict_wd_oct.values()].plot(ax=ax[0])
ax1.set_xticklabels([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('October')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)


ax2 = wdm_15m.loc[:, dict_wd_nov.values()].plot(ax=ax[1])
ax2.set_xticklabels([])
ax2.set_ylabel('Number of Validations')
ax2.set_title('November')
ax2.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax3 = wdm_15m.loc[:, dict_wd_dec.values()].plot(ax=ax[2])
ax3.set_xlabel('Time')
ax3.set_ylabel('Number of Validations')
ax3.set_title('December')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();
In [790]:
f, ax = plt.subplots(2, figsize=(16, 8))

ax1 = wdm_15mf.loc[:, dict_wd_novf.values()].plot(ax=ax[0])
ax1.set_xticks([])
ax1.set_ylabel('Number of Validations')
ax1.set_title('November')
ax1.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
           borderaxespad=0.)

ax2 = wdm_15mf.loc[:, dict_wd_decf.values()].plot(ax=ax[1])
ax2.set_xlabel('Time')
ax2.set_ylabel('Number of Validations')
ax2.set_title('December')
plt.legend(bbox_to_anchor=(1., 0.9, 1.1, .102), loc=2,
           ncol=2, borderaxespad=0.)
plt.tight_layout()

plt.show();
In [791]:
f, ax = plt.subplots(2, figsize=(16, 12))

ax1 = wdmt_15mf.boxplot(return_type='both', ax=ax[0])
ax[0].set_xlabel("Time", fontsize=15)
ax[0].set_ylabel("Number of Validations", fontsize=15)

for tick in ax[0].get_xticklabels():
    tick.set_rotation(90)

ax2 = wdm_15mf.boxplot(return_type='both', ax=ax[1])
plt.xticks(rotation=90)

plt.tight_layout()
plt.show();
In [792]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

wdm_15mf.plot(ax=ax1, legend=False)
ax1.set_ylabel('Number of Validations'); ax1.set_title('15min')

ax2 = wdmt_15mf.plot(ax=ax2, legend=False)
ax2.set_ylabel('Number of Validations'); ax2.set_title('15min')

plt.xticks(rotation=90)
plt.tight_layout()

plt.show();

Autocorrelation Plots

In [793]:
fig, (ax1, ax2) = plt.subplots(2, figsize=(16, 12))

autocorrelation_plot(wdmt_15mf.mean(), ax=ax1, c='blue')
ax1.set_title('15min discretization matrix')

plot_acf(wdmt_15mf.mean(), ax=ax2, c='blue', title='Auto Correlation')

plt.show();
In [794]:
plt.figure(figsize=(16, 7))

lag_plot(wdmt_15mf.mean(), c='blue')
plt.title('Lag plot 15min discretization matrix')

# plot_pacf(wdmt_15mf.mean(), ax=ax[1], c='blue', title='Partial Auto Correlation')

plt.show();
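
For intuition, here is a minimal sketch of the sample lag-$k$ autocorrelation that these plots display (assuming the usual normalized definition):

import numpy as np

def autocorr(x, k):
    """Sample autocorrelation of x at lag k (k >= 1)."""
    x = np.asarray(x, dtype=float)
    xm = x - x.mean()
    return np.dot(xm[:-k], xm[k:]) / np.dot(xm, xm)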

Splitting Data into Train and Test

In [795]:
dico = dict_wd
size = 45
In [796]:
X = data_matrix_15m.loc[dico.values()]
Xm = X.mean()
Xmt = Xm.transpose()
In [797]:
kw = list(dico.keys())
np.random.shuffle(kw)

vw = [dico[i] for i in kw]
In [798]:
ind_train = vw[:size]
ind_test = vw[size:]
X_train = X[ind_train]
X_test = X[ind_test]
In [799]:
X_train
X_test
Out[799]:
<class 'pandas.core.panel.Panel'>
Dimensions: 45 (items) x 303 (major_axis) x 80 (minor_axis)
Items axis: 2015-11-19 to 2015-11-03
Major_axis axis: 198 to 60982
Minor_axis axis: 04:00:00 to 23:45:00
Out[799]:
<class 'pandas.core.panel.Panel'>
Dimensions: 21 (items) x 303 (major_axis) x 80 (minor_axis)
Items axis: 2015-10-22 to 2015-12-09
Major_axis axis: 198 to 60982
Minor_axis axis: 04:00:00 to 23:45:00

Models

Baseline

In [800]:
def baseline_plot_results(levels):
    """
    
    """
    
    baseline_scores = []
    baseline_preds = []
    for level in levels:
        b = Baseline(level=level, first_ndays=5)
        b.fit(X_train)
        baseline_preds.append(b.predict(X_test))
        baseline_scores.append(b.score(X_test))
    
    df_baseline_scores = pd.DataFrame(np.array(baseline_scores).T,
                                 index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'],
                                 columns=levels)
    display(HTML(df_baseline_scores.to_html()))
    pd.DataFrame(df_baseline_scores.loc['RMSE'].values.repeat(4).reshape(-1, 4).T,
                 columns=levels).plot(figsize=(16, 4), kind='line');
    
    return df_baseline_scores, baseline_preds
In [801]:
levels = ["None", "s"]
df_baseline_scores, baseline_preds = baseline_plot_results(levels)
None s
R2 0.088356 0.906262
RMSE 352.087402 112.900559
MSE 123965.538361 12746.536192
MAE 1.805186 0.300423
MAPE 180.518622 30.042271
MPE -159.830890 -11.611731
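
For reference, here is a minimal sketch of the reported error metrics; the project's metrics module may differ in detail (e.g. the percentage base used for MAPE and MPE):

import numpy as np

def error_metrics(y_true, y_pred):
    """Return the six metrics reported above, under common definitions."""
    y_true = np.asarray(y_true, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    err = y_true - y_pred
    mse = np.mean(err ** 2)
    r2 = 1 - np.sum(err ** 2) / np.sum((y_true - y_true.mean()) ** 2)
    denom = np.maximum(np.abs(y_true), 1e-8)  # guard against division by zero
    return {'R2': r2, 'RMSE': np.sqrt(mse), 'MSE': mse,
            'MAE': np.mean(np.abs(err)),
            'MAPE': 100 * np.mean(np.abs(err) / denom),
            'MPE': 100 * np.mean(err / denom)}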
In [802]:
from cost_functions import mse, mse_g
from sklearn.linear_model import LinearRegression, Lasso
class myAR(Regressor):
    def __init__(self, order=4, level=None, loss=mse, loss_g=mse_g, max_iter=1000,
                 eps=0.01):
        """ Initialisation des paramètres du perceptron

        :param order: Taille de la fenêtre glissante
        :param loss: fonction de coût
        :param loss_g: gradient de la fonction coût
        :param max_iter: nombre maximum d'itération de la fonction coût
        :param eps: pas du gradient


        """

        self.order = order
        self.level = level
        self.max_iter, self.eps = max_iter, eps
        self.loss, self.loss_g = loss, loss_g
        self.w = np.random.random(self.order)
                      
    
    @Regressor.datax_decorator
    def analytic_fit(self, datax):
        """ Finds the optimal weigths analytically 
        
        :param datax: contient tous les exemples du dataset
        :returns: void
        :rtype: None
        
        """
        
        self.reg = LinearRegression()
        _, self.X, self.y = datax
        A, B = self.X.T.dot(self.X), self.X.T.dot(self.y)
        self.w1 = np.linalg.solve(A, B).ravel()
        self.reg.fit(self.X, self.y)
        self.w = self.reg.coef_.squeeze()
        display(HTML(pd.DataFrame(self.w.reshape(1, -1), index=['Weights'], 
                                  columns=range(1, len(self.w)+1)).to_html()))
       
        return self

    def minibatch_fit(self, datax):
        """ Mini-Batch gradient descent Learning

        :param datax: contient tous les exemples du dataset
        
        """

        for _ in range(self.max_iter):
            for d in range(datax.shape[0]):
                for t in range(datax.shape[2] - self.order):
                    batchx = datax.iloc[d, :, t:t + self.order].values
                    batchy = datax.iloc[d, :, t + self.order].values
                    self.w -= (self.eps * self.loss_g(batchx, batchy, self.w))
                   

    def predict(self, datax):
        """ Predict labels

        :param datax: contains all the dataset examples
        :returns: predicted labels
        :rtype: numpy array

        """

        y_pred = []
        for d in range(datax.shape[0]):
            y_pred.append([])
            for t in range(datax.shape[2] - self.order):
                batchx = datax.iloc[d, :, t:t + self.order].values
                y_pred[d].append(batchx.dot(self.w.T))

        return np.array(y_pred).transpose(0, 2, 1)

    def forecast_n(self, datax):
        """ Predict labels

        :param datax: contient tous les exemples du dataset
        :returns: predicted labels
        :rtype: numpy array

        """

        y_pred = []
        for d in range(datax.shape[0]):
            y_pred.append([])
            batchx = datax.iloc[d, :, 0:self.order].values
            for t in range(datax.shape[2] - self.order):
                next_y = batchx.dot(self.w.T)
                y_pred[d].append(next_y)
                batchx = np.hstack(
                    (batchx[:, 1:], np.array(next_y).reshape(-1, 1)))

        return np.array(y_pred).transpose(0, 2, 1)
    
    def transform_batchx(self, batchx, tplus):
        """
        """
        if tplus == 1:
            return batchx
        
        for _ in range(tplus-1):
            next_y = batchx.dot(self.w.T)
            if batchx.ndim == 2:
                batchx = np.hstack((batchx[:, 1:], 
                                    np.array(next_y).reshape(-1, 1)))
            elif batchx.ndim == 1:
                batchx = np.hstack((batchx[1:], next_y))
                
        return batchx

    def forecast(self, datax, tplus=None):
        """ Predict labels

        :param datax: contient tous les exemples du dataset
        :param tplus: if t equal to 2, means predicting what happened at t+2
        :returns: predicted labels
        :rtype: numpy array
        
        """
                
        if tplus is None or tplus > self.order:
            return self.forecast_n(datax)
        else:
            y_pred = []
            batch_ind = self.order - tplus
            
            if datax.ndim == 3:
                for d in range(datax.shape[0]):
                    y_pred.append([])
                    # Take the first batch
                    batchx = datax.iloc[d, :, 0:self.order].values
                    # Predict till we finish the first round of tplus
                    for _ in range(tplus):
                        next_y = batchx.dot(self.w.T)
                        y_pred[d].append(next_y)
                        batchx = np.hstack((batchx[:, 1:], 
                                            np.array(next_y).reshape(-1, 1)))
                        
                    # After the first round of tplus, we have to replace some
                    # predicted values by the real ones and simultaneously 
                    # replace the following columns by t+1,..., tplus
                    for t in range(1, datax.shape[2] - self.order - tplus + 1): 
                        batchx = self.transform_batchx(
                            datax.iloc[d, :, t:self.order+t].values, tplus)
                        next_y = batchx.dot(self.w.T)
                        # next_y = np.where(next_y < 0, 0, next_y)
                        y_pred[d].append(next_y)
            elif datax.ndim == 2:
                # TODO
                pass
            elif datax.ndim == 1:
                batchx = datax.iloc[0:self.order].values
                
                for _ in range(tplus):
                    next_y = batchx.dot(self.w.T)
                    y_pred.append(next_y)
                    batchx = np.hstack((batchx[1:], next_y))
                
                print(datax.shape[0])
                for t in range(1, datax.shape[0] - self.order - tplus + 1):
                    batchx = self.transform_batchx(
                            datax.iloc[t:self.order+t].values, tplus)
                    next_y = batchx.dot(self.w.T)
                    # if next_y < 0: next_y = 0
                    y_pred.append(next_y)
                    
                return np.array(y_pred)
            else:
                raise ValueError("Untreated datax number of dimensions")
                                        
        return np.array(y_pred).transpose(0, 2, 1)
    
    @Regressor.datax_decorator
    def again(self, datax):
        """ Predicts with the fitted sklearn regressor and reshapes the
        result to (days, stations, times). """
        datax, self.X_test, self.y_test = datax
    
        y_pred = self.reg.predict(self.X_test)
        y_pred = y_pred.reshape((datax.shape[0] * datax.shape[1], 
                                datax.shape[2] - self.order), 
                                order='F').reshape((datax.shape[0],
                                                   datax.shape[1],
                                                   datax.shape[2] - self.order))
        
        return y_pred
    
    
    def reshaped(self, y_pred, datax):
        """
        """
        
        if datax.ndim == 3:
            return y_pred.reshape((datax.shape[0] * datax.shape[1], 
                                datax.shape[2] - self.order), 
                                order='F').reshape((datax.shape[0],
                                                   datax.shape[1],
                                                   datax.shape[2] - self.order))
        elif datax.ndim == 2:
            return y_pred.reshape((datax.shape[0], datax.shape[1] - self.order),
                                order='F')
    
    # NB: this second `forecast` (based on the fitted sklearn regressor)
    # overrides the recursive implementation defined above.
    @Regressor.datax_decorator
    def forecast(self, datax, tplus):
        datax, self.X_test, self.y_test = datax
        
        if tplus == 1:
            return self.reshaped(self.reg.predict(self.X_test), datax)
            
        else:
            self.X_test = self.X_test.reshape(datax.shape[-1] - self.order, -1, self.order)
        
            tmp = self.X_test[0]
            y_pred = self.reg.predict(tmp)
            pred = y_pred.copy()

            for x in self.X_test[1:]:
                x[:, -1] = pred.squeeze()
                x[:, -tplus:-1] = tmp[:, -tplus+1:]
                tmp = x.copy()
                pred = self.reg.predict(tmp)
                y_pred = np.vstack((y_pred, pred))

            return self.reshaped(y_pred, datax)


    @Regressor.datax_decorator
    def again_s(self, datax):
        datax, self.X_test, self.y_test = datax
        
        y_pred = self.reg.predict(self.X_test)
        y_pred = y_pred.reshape((datax.shape[0], datax.shape[1] - self.order),
                                order='F')
        
        return y_pred
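
analytic_fit solves the least-squares problem of the AR($p$) model $y_t \approx \sum_{i=1}^{p} w_i\, y_{t-i}$ through the normal equations $X^T X\, w = X^T y$. Below is a minimal standalone sketch on a toy series (fit_ar is a hypothetical helper, not the project's API):

import numpy as np

def fit_ar(series, order=8):
    """Fit AR(order) weights by solving the normal equations."""
    X = np.array([series[t:t + order] for t in range(len(series) - order)])
    y = series[order:]
    return np.linalg.solve(X.T @ X, X.T @ y)

rng = np.random.default_rng(0)
toy = np.cumsum(rng.normal(size=500))  # toy random-walk series
w = fit_ar(toy, order=8)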
In [803]:
def panelIt(X_pred, X_test, order, subway_stations, del_hours=0):
    """

    """

    wd_testorder_15m = X_test.iloc[:, :, order:]
    minor_axis = generate_times("15min")[(del_hours * 4) + order:]
    
    return pd.Panel(X_pred,
                    items=wd_testorder_15m.items,
                    major_axis=subway_stations,
                    minor_axis=minor_axis)
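
forecast_n above is a recursive forecaster: each prediction is appended to the sliding window while the oldest value is dropped. A minimal sketch of this idea (forecast_recursive is a hypothetical helper; weights are assumed ordered from oldest to most recent lag, as in myAR):

import numpy as np

def forecast_recursive(history, w, horizon):
    """Forecast `horizon` steps ahead from the last len(w) observations."""
    window = list(history[-len(w):])
    preds = []
    for _ in range(horizon):
        next_y = float(np.dot(window, w))  # y_hat = sum_i w_i * y_{t-p+i}
        preds.append(next_y)
        window = window[1:] + [next_y]     # slide: drop oldest, append prediction
    return np.array(preds)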
In [804]:
class theAR(Baseline):
    station_id = 0
    def __init__(self, level=None, first_ndays=7, **kwargs):
        """
        
        """
        
        super().__init__(level, first_ndays)
        self.kwargs = kwargs
        
    def fit(self, datax):
        """
        
        """
        
        if self.level is None:
            self.model = myAR(**self.kwargs)
            self.model.analytic_fit(datax)
            
        elif self.level.lower() == "s":
            
            self.models = []            
            
            datax.apply(lambda station: self.models.append(
                myAR(**self.kwargs).analytic_fit(station.T)), 
                        axis=(0, 2))
        
        elif self.level.lower() == "j":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean().mean(axis=1))      
                
        elif self.level.lower() == "sj":
            # TODO
            self.mean = []
            for d in range(self.first_ndays):
                exist_ind = list(set(ddict_days[d].values()) & set(datax.items))
                self.mean.append(datax[exist_ind].mean(axis=0))
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def predict(self, datax, tplus=None):
        """
        
        """
        
        def predict_for_station(x, tplus):
            """
            """
            
            station_pred = self.models[self.station_id].forecast(x, tplus)
            self.station_id += 1
            
            return station_pred
        
        if self.level is None:
            
            X_pred = self.model.forecast(datax, tplus)

            self.scores = super().metrics_score(
                datax.iloc[:, :, self.model.order:], X_pred)
            
            return panelIt(X_pred, datax, self.model.order, subway_stations, del_hours)
            
        elif self.level.lower() == "s":
            
            X_pred = datax.apply(lambda x: predict_for_station(x.T, tplus), 
                                 axis=(0, 2)).transpose(1, 0, 2)
  
            self.scores = super().metrics_score(
                datax.iloc[:, :, self.models[0].order:], X_pred.values)
            
            self.station_id = 0
            
            return panelIt(X_pred.values, datax, self.models[0].order, subway_stations, del_hours)
        
        elif self.level.lower() == "j":
            # TODO
            pass
        elif self.level.lower() == "sj":
            # TODO
            pass
        else:
            raise ValueError("Unknown value for level attribute, \
            try: s, j, sj or None")
    
    
    def score(self, datax):
        """
        
        """
           
        return self.scores
    
In [851]:
def ar_plot_results(level, order, limit_t, X_train=X_train, X_test=X_test):
    """
    
    """
    
    ar_scores = []
    ar_preds = []
    ar = theAR(level=level, order=order)
    
    print("Fitting...")
    ar.fit(X_train)
    
    print("Predicting...")
    
    for t in range(1, limit_t+1):
        ar_preds.append(ar.predict(X_test, t))
        ar_scores.append(ar.score(X_test))
    
    display(HTML((pd.DataFrame(np.array(ar_scores).T, 
                               index=['R2', 'RMSE', 'MSE', 'MAE', 'MAPE', 'MPE'], 
                               columns=list(map(
                                   lambda x: "t+"+str(x),
                                   range(1, len(ar_scores)+1))))).to_html()))
    
    
    return ar_preds, ar_scores

def plot_qualitative_analysis(ar_preds, X_test, limit_t, order, subway_stations, del_hours):
    """
    
    """
    
    fig, ax = plt.subplots(limit_t+1, figsize=(16, limit_t*4))

    wd_testorder_15m = X_test.iloc[:, :, order:]
    wdm_testorder_15m = wd_testorder_15m.mean()

    wdm_testorder_15m.plot(ax=ax[0])
    ax[0].set_ylabel('Number of Validations')
    ax[0].set_title('Test')
    ax[0].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
               borderaxespad=0.)

    for i in range(limit_t):
        pred_t = ar_preds[i].mean()
        pred_t.plot(ax=ax[i+1])
        ax[i+1].set_ylabel('Number of Validations')
        ax[i+1].set_title("Predict t+{}".format(i+1))
        ax[i+1].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=2, loc=2,
                     borderaxespad=0.)

    plt.tight_layout()
    plt.show();
    
def plot_specific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s):
    """ Compares, for day j and station s, the AR and baseline predictions
    with the real values at each horizon. """
    
    fig, ax = plt.subplots(limit_t, figsize=(16, limit_t*5))

    for t in range(limit_t):
        ar_preds[t].iloc[j, s].plot(ax=ax[t], label='General AR')
        ar_preds_s[t].iloc[j, s].plot(ax=ax[t], label='AR By Station')
        X_test.iloc[j, s].plot(ax=ax[t], label="Real values")
        baseline_preds[0].iloc[j, s].plot(ax=ax[t], style=['.--'], label='General Baseline')
        baseline_preds[1].iloc[j, s].plot(ax=ax[t], style=['.--'], label='Baseline per station')
        ax[t].set_ylabel('Number of Validations')
        ax[t].set_title("AR models at t+{} with an order of {}".format(t+1, order))
        ax[t].legend(bbox_to_anchor=(1., 0.9, 1.1, .102), ncol=1, loc=2,
                     borderaxespad=0.)

    plt.tight_layout()
    plt.show();

We still have to solve this!

AR

In [909]:
order, limit_t = 8, 6
In [910]:
%%time
ar_preds, ar_scores = ar_plot_results(None, order, limit_t)
Fitting...
1 2 3 4 5 6 7 8
Weights 0.070019 0.031554 -0.020517 -0.204156 -0.021113 -0.189397 0.260882 1.036164
Predicting...
t+1 t+2 t+3 t+4 t+5 t+6
R2 0.957370 -1.897721e+11 -2.707099e+05 -4.560447e+04 -0.177865 -0.358642
RMSE 79.452884 1.676360e+08 2.002184e+05 8.217873e+04 417.636854 448.542396
MSE 6312.760834 2.810182e+16 4.008739e+10 6.753344e+09 174420.542151 201190.280657
MAE 0.300455 6.407530e+05 8.308151e+02 3.472627e+02 1.629857 1.608493
MAPE 30.045450 6.407530e+07 8.308151e+04 3.472627e+04 162.985654 160.849294
MPE -15.341521 1.857662e+07 9.990315e+03 1.735473e+03 -110.415746 -68.958435
CPU times: user 6.59 s, sys: 4.97 s, total: 11.6 s
Wall time: 7.26 s
In [911]:
%%time
ar_preds_s, ar_scores_s = ar_plot_results("s", order, limit_t)
Fitting...
1 2 3 4 5 6 7 8
Weights 0.042303 -0.017063 -0.031074 -0.11862 -0.196164 0.036483 0.343897 0.782647
[... analytic_fit displays one such 8-weight table per station; the remaining tables are omitted ...]
Predicting...
t+1 t+2 t+3 t+4 t+5 t+6
R2 0.961290 -1.579792e+11 -6.635681e+04 -7.212816e+08 0.195001 0.407810
RMSE 75.711219 1.529505e+08 9.912815e+04 1.033483e+07 345.261946 296.129461
MSE 5732.188632 2.339386e+16 9.826390e+09 1.068087e+14 119205.811284 87692.657518
MAE 0.300410 1.082169e+05 1.072432e+02 5.553353e+03 1.499929 1.384843
MAPE 30.041006 1.082169e+07 1.072432e+04 5.553353e+05 149.992934 138.484256
MPE -15.247704 -3.427496e+06 8.447426e+03 -4.629583e+05 -63.636741 -71.331554
CPU times: user 2min 59s, sys: 1min 49s, total: 4min 49s
Wall time: 2min 35s
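For reference, here is a minimal sketch of what each per-station fit above amounts to, using the statsmodels AR class imported at the top of the notebook. The actual loop lives inside ar_plot_results, whose internals are not shown here, so this is an assumption-laden illustration; station_series is a hypothetical 1-D array of normalized validations for a single station.

import numpy as np
from statsmodels.tsa.ar_model import AR

def fit_station_ar(station_series, order=8, steps=6):
    # Fit an AR(order) model on one station's series with the (now deprecated)
    # statsmodels AR class, then forecast `steps` values out of sample.
    results = AR(np.asarray(station_series, dtype=float)).fit(maxlag=order)
    start = len(station_series)
    forecast = results.predict(start=start, end=start + steps - 1)
    return results.params, forecast

Each Weights row above would then correspond to one station's fitted lag coefficients.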
In [912]:
plot_specific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s)
In [932]:
plot_qualitative_analysis(ar_preds, X_test, limit_t, order, subway_stations, del_hours)
In [933]:
plot_qualitative_analysis(ar_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [913]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T  # tile the single baseline RMSE across all horizons
model_score = np.array(ar_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of General baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [914]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [915]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])
x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(ar_scores).T[1], np.array(ar_scores_s).T[1])).T
baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR", "AR per station"]
arlineObjects = plt.plot(x, model_scores, linewidth=3)

# baseline_scores and model_scores each have two columns, hence two markers
for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)
    
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

AR + Baseline

Here the baseline prediction is subtracted from the data before fitting the AR model and added back to its forecasts afterwards, so the AR model only has to learn the residual structure that the baseline misses.

In [918]:
order, limit_t = 8, 6  # AR order (number of lags) and forecast horizon
In [919]:
def baseline_sub(X, baseline):
    # Subtract the baseline prediction (a single stations x times frame)
    # from every day in the Panel, leaving the residuals the AR model is fit on.
    return X.apply(lambda x: x - baseline.iloc[0], axis=(1, 2))

def baseline_add(X, baseline):
    # Inverse of baseline_sub: add the baseline frame back to each day,
    # restoring forecasts to the original scale.
    return X.apply(lambda x: x + baseline.iloc[0], axis=(1, 2))
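In numpy terms these two helpers just broadcast one baseline frame over the date axis. A self-contained sketch with hypothetical shapes (92 days, 303 stations, 12 time slots), standing in for the Panel API used above:

import numpy as np

X = np.random.rand(92, 303, 12)            # stand-in for a (days, stations, times) Panel
baseline_frame = X.mean(axis=0)            # stand-in for baseline.iloc[0]

Xb = X - baseline_frame[None, :, :]        # baseline_sub: AR models are fit on these residuals
X_back = Xb + baseline_frame[None, :, :]   # baseline_add: restores the original scale
assert np.allclose(X_back, X)              # sub followed by add is the identity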
In [920]:
Xb_train = baseline_sub(X_train, baseline_preds[0])
Xb_test = baseline_sub(X_test, baseline_preds[0])
Xb_train_s = baseline_sub(X_train, baseline_preds[1])
Xb_test_s = baseline_sub(X_test, baseline_preds[1])
In [921]:
%%time
ar_preds, ar_scores = ar_plot_results(None, order, limit_t, X_train=Xb_train, X_test=Xb_test)
for t in range(limit_t):
    ar_preds[t] = baseline_add(ar_preds[t], baseline_preds[0])
Fitting...
1 2 3 4 5 6 7 8
Weights 0.072414 0.028176 -0.031006 -0.18143 -0.044451 -0.16222 0.273703 1.010436
Predicting...
t+1 t+2 t+3 t+4 t+5 t+6
R2 0.957307 -4.867925e+10 -5.791504e+05 -1.476468e+04 -0.016708 -0.183099
RMSE 76.631140 8.182732e+07 2.822425e+05 4.506642e+04 373.959482 403.400934
MSE 5872.331552 6.695710e+15 7.966083e+10 2.030982e+09 139845.694173 162732.313304
MAE -0.183396 -1.045197e+05 -2.510891e+02 -4.125299e+01 -0.930387 -1.075510
MAPE -18.339618 -1.045197e+07 -2.510891e+04 -4.125299e+03 -93.038670 -107.550988
MPE -4.077683 1.567388e+07 6.595581e+04 1.180699e+04 -49.099836 28.113765
CPU times: user 8.7 s, sys: 7.13 s, total: 15.8 s
Wall time: 8.79 s
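The score rows above come from the star-imported metrics module, whose internals are not shown; as a point of reference, the per-horizon RMSE row reduces to something like this sketch, with hypothetical y_true and y_pred arrays of shape (samples, horizons):

import numpy as np

def rmse_per_horizon(y_true, y_pred):
    # One RMSE value per forecast horizon t+1 .. t+H (one column per horizon).
    err = np.asarray(y_true) - np.asarray(y_pred)
    return np.sqrt(np.mean(err ** 2, axis=0))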
In [922]:
%%time
ar_preds_s, ar_scores_s = ar_plot_results("s", order, limit_t,  X_train=Xb_train_s, X_test=Xb_test_s)
for t in range(limit_t):
    ar_preds_s[t] = baseline_add(ar_preds_s[t], baseline_preds[1]) 
Fitting...
1 2 3 4 5 6 7 8
Weights 0.014292 -0.054597 -0.058433 -0.059161 0.016652 0.246049 0.340732 0.386301
[... remaining per-station AR(8) weight rows truncated for readability ...]
Predicting...
t+1 t+2 t+3 t+4 t+5 t+6
R2 0.782397 -3.995382e+02 -1.224716e+02 -9.774241e+01 -1.111024 -0.248487
RMSE 55.445032 2.378768e+03 1.320729e+03 1.181087e+03 172.693742 132.807313
MSE 3074.151561 5.658536e+06 1.744325e+06 1.394967e+06 29823.128452 17637.782373
MAE 0.101027 2.718213e+00 2.035256e+00 -1.911682e+00 0.182380 0.098470
MAPE 10.102655 2.718213e+02 2.035256e+02 -1.911682e+02 18.237989 9.846998
MPE 46.548729 8.975921e+02 4.246552e+02 4.632503e+02 122.715144 97.065722
CPU times: user 2min 51s, sys: 1min 50s, total: 4min 42s
Wall time: 2min 30s
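Subtracting the per-station baseline clearly pays off here: the t+1 RMSE drops to about 55.4, against 75.7 for the plain per-station AR above, and the explosive mid-horizon errors (t+2 to t+4) shrink by several orders of magnitude, even though the reported t+1 R2 is lower (0.78 against 0.96).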
In [923]:
plot_specific(X_test, baseline_preds, ar_preds, ar_preds_s, order, limit_t, j, s)
In [934]:
plot_qualitative_analysis(ar_preds, X_test, limit_t, order, subway_stations, del_hours)
In [935]:
plot_qualitative_analysis(ar_preds_s, X_test, limit_t, order, subway_stations, del_hours)
In [924]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 'None'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="General baseline")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of full baseline and AR model, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);
In [925]:
fig, ax = plt.subplots(1, figsize=(10, 6))
ax.set_prop_cycle(color=['darkgoldenrod', 'brown'])
x = range(1, limit_t+1)
baseline_score = df_baseline_scores.loc['RMSE', 's'].repeat(limit_t).reshape(-1, limit_t).T
model_score = np.array(ar_scores_s).T[1]

ax = plt.plot(x, model_score, linewidth=3, label="AR per station + baseline")
ax = plt.scatter(x, model_score, marker='*', s=100)
ax = plt.plot(x, baseline_score, linewidth=3, label="Baseline per station")
ax = plt.scatter(x, baseline_score, marker='*', s=100)

plt.legend(prop={'size': 20})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Compute and Compare

In [930]:
fig, ax = plt.subplots(1, figsize=(16, 6))
ax.set_prop_cycle(color=['crimson', 'teal', 'b', 'darkgoldenrod'])

x = range(1, limit_t+1)
baseline_scores = df_baseline_scores.loc['RMSE'].values.repeat(limit_t).reshape(-1, limit_t).T
model_scores = np.vstack((np.array(ar_scores).T[1], np.array(ar_scores_s).T[1])).T

baselineObjects = plt.plot(x, baseline_scores, linewidth=3)
labels = ["General baseline", "Baseline per station", "AR + baseline", "AR per station + baseline"]

arlineObjects = plt.plot(x, model_scores, linewidth=3)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, baseline_scores[:, i], marker=m, s=100)

for i, m in zip(range(2), ['D', '*']):
    ax = plt.scatter(x, model_scores[:, i], marker=m, s=100)

plt.ylim((0, model_scores[:, 1].max()))
plt.legend(baselineObjects+arlineObjects, labels, prop={'size': 15})
plt.title("RMSE of baseline and AR model for each station, from $t+1$ to $t+{}$".format(limit_t), fontsize=16)
plt.xlabel("T plus", fontsize=16); plt.ylabel("RMSE", fontsize=16);

Draft